In [21]:
# Import neccesary pacakges
import os
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense
from tensorflow.keras.preprocessing.image import load_img, img_to_array
from sklearn.metrics import confusion_matrix, precision_score
from sklearn.metrics import accuracy_score

Load the in the images in the train and validation data

In [2]:
# Specify the path to the train folder
#train_folder = 'C:/Users/sofiy/vehicle_dataset/Train'

# Define the image data generator for the train set
datagen = ImageDataGenerator(rescale=1.0/255)

# Load the images from the train folder using the data generator
train_generator = datagen.flow_from_directory(
    os.path.join("C:/Users/sofiy/vehicle_dataset/Train"), # path to train folder
    target_size=(64, 64),
    batch_size=1,
    class_mode='categorical',
    shuffle= False
)

# Define the image data generator for the validation set
validation_datagen = ImageDataGenerator(rescale=1.0/255)

# Load the images from the validation folder using the data generator
validation_generator = validation_datagen.flow_from_directory(
    os.path.join("C:/Users/sofiy/vehicle_dataset/Validation"),    # Path to validation folder
    target_size=(64, 64),
    batch_size=1,
    class_mode='categorical',
    shuffle=False
)
Found 3394 images belonging to 6 classes.
Found 1127 images belonging to 6 classes.

Randomly display an image from each class

In [3]:
# Get the class labels and their corresponding indices
class_labels = train_generator.class_indices

# Create a dictionary to store the first image path from each class
class_image_paths = {}

# Iterate over the train generator to find the first image path from each class
for i in range(len(train_generator)):
    # Get the image and label from the generator
    image, label = train_generator[i]

    # Get the class label index
    class_index = label.argmax()

    # Get the class label name
    class_name = list(class_labels.keys())[class_index]

    # If the class is not yet added to the dictionary, store its image path
    if class_name not in class_image_paths:
        image_path = train_generator.filepaths[i]
        class_image_paths[class_name] = image_path

    # If images from all classes are found, break the loop
    if len(class_image_paths) == len(class_labels):
        break

# Create a grid of subplots
fig, axs = plt.subplots(len(class_image_paths) // 4 + 1, 4, figsize=(15, 15))

# Flatten the axs array for easier indexing
axs = axs.flatten()

# Display the images from each class
for i, (class_name, image_path) in enumerate(class_image_paths.items()):
    # Load the image
    image = plt.imread(image_path)

    # Display the image in the corresponding subplot
    axs[i].imshow(image)
    axs[i].set_title(class_name)
    axs[i].axis('off')

    # Break the loop if all subplots are filled
    if i + 1 == len(axs):
        break

# Hide any remaining empty subplots
for j in range(len(class_image_paths), len(axs)):
    axs[j].axis('off')

# Adjust the spacing between subplots
plt.tight_layout()

# Show the plots
plt.show()

Create and train the first model using data augumentaion

In [4]:
# Define the batch size
batch_size = 32

# Define the image data generator for the train set with data augmentation
train_datagen_aug = ImageDataGenerator(
    rescale=1.0/255,
    rotation_range=20,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True
)

# Load the images from the train folder using the augmented data generator
train_generator_aug = train_datagen_aug.flow_from_directory(
    os.path.join("C:/Users/sofiy/vehicle_dataset/Train"),
    target_size=(64, 64),
    batch_size=batch_size,
    class_mode='categorical',
    shuffle=False
)

# Define the image data generator for the validation set (no augmentation for validation)
validation_datagen = ImageDataGenerator(rescale=1.0/255)

# Load the images from the validation folder using the data generator
validation_generator = validation_datagen.flow_from_directory(
    os.path.join("C:/Users/sofiy/vehicle_dataset/Validation"),    # Path to validation folder
    target_size=(64, 64),
    batch_size=batch_size,
    class_mode='categorical',
    shuffle=False
)

# Define the model
model2 = Sequential()
model2.add(Conv2D(64, (5, 5), activation='relu', input_shape=(64, 64, 3)))
model2.add(MaxPooling2D((2, 2)))
model2.add(Conv2D(32, (3, 3), activation='relu'))
model2.add(MaxPooling2D((2, 2)))
model2.add(Flatten())
model2.add(Dense(128, activation='relu'))
model2.add(Dense(6, activation='softmax'))

# Compile the model
model2.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])

# Train the new model using the train generator with data augmentation
fit2 = model2.fit(train_generator_aug, epochs=100, batch_size=batch_size, validation_data=validation_generator)
Found 3394 images belonging to 6 classes.
Found 1127 images belonging to 6 classes.
Epoch 1/100
107/107 [==============================] - 111s 1s/step - loss: 1.8008 - accuracy: 0.1959 - val_loss: 1.7737 - val_accuracy: 0.2626
Epoch 2/100
107/107 [==============================] - 104s 972ms/step - loss: 1.7694 - accuracy: 0.3100 - val_loss: 1.6987 - val_accuracy: 0.2626
Epoch 3/100
107/107 [==============================] - 107s 1s/step - loss: 1.6606 - accuracy: 0.3332 - val_loss: 1.7130 - val_accuracy: 0.2626
Epoch 4/100
107/107 [==============================] - 104s 972ms/step - loss: 1.7502 - accuracy: 0.3462 - val_loss: 1.6723 - val_accuracy: 0.2626
Epoch 5/100
107/107 [==============================] - 108s 1s/step - loss: 1.6450 - accuracy: 0.3462 - val_loss: 1.6819 - val_accuracy: 0.2626
Epoch 6/100
107/107 [==============================] - 103s 965ms/step - loss: 1.6639 - accuracy: 0.3689 - val_loss: 1.6048 - val_accuracy: 0.3416
Epoch 7/100
107/107 [==============================] - 106s 993ms/step - loss: 1.5874 - accuracy: 0.3695 - val_loss: 1.6152 - val_accuracy: 0.3469
Epoch 8/100
107/107 [==============================] - 106s 989ms/step - loss: 1.5337 - accuracy: 0.4163 - val_loss: 1.5361 - val_accuracy: 0.3638
Epoch 9/100
107/107 [==============================] - 108s 1s/step - loss: 1.5079 - accuracy: 0.4207 - val_loss: 1.5385 - val_accuracy: 0.3620
Epoch 10/100
107/107 [==============================] - 107s 994ms/step - loss: 1.4789 - accuracy: 0.4290 - val_loss: 1.5285 - val_accuracy: 0.3576
Epoch 11/100
107/107 [==============================] - 106s 993ms/step - loss: 1.4756 - accuracy: 0.4278 - val_loss: 1.5101 - val_accuracy: 0.3549
Epoch 12/100
107/107 [==============================] - 105s 978ms/step - loss: 1.4998 - accuracy: 0.4225 - val_loss: 1.5087 - val_accuracy: 0.3585
Epoch 13/100
107/107 [==============================] - 103s 958ms/step - loss: 1.4740 - accuracy: 0.4228 - val_loss: 1.4968 - val_accuracy: 0.3620
Epoch 14/100
107/107 [==============================] - 105s 983ms/step - loss: 1.4689 - accuracy: 0.4287 - val_loss: 1.4578 - val_accuracy: 0.3718
Epoch 15/100
107/107 [==============================] - 104s 978ms/step - loss: 1.4995 - accuracy: 0.4369 - val_loss: 1.4856 - val_accuracy: 0.3789
Epoch 16/100
107/107 [==============================] - 109s 1s/step - loss: 1.4481 - accuracy: 0.4378 - val_loss: 1.4460 - val_accuracy: 0.3771
Epoch 17/100
107/107 [==============================] - 107s 996ms/step - loss: 1.4025 - accuracy: 0.4570 - val_loss: 1.4229 - val_accuracy: 0.4046
Epoch 18/100
107/107 [==============================] - 99s 927ms/step - loss: 1.3750 - accuracy: 0.4532 - val_loss: 1.3157 - val_accuracy: 0.4667
Epoch 19/100
107/107 [==============================] - 100s 933ms/step - loss: 1.3228 - accuracy: 0.4567 - val_loss: 1.2869 - val_accuracy: 0.5173
Epoch 20/100
107/107 [==============================] - 116s 1s/step - loss: 1.3091 - accuracy: 0.4761 - val_loss: 1.2951 - val_accuracy: 0.4845
Epoch 21/100
107/107 [==============================] - 116s 1s/step - loss: 1.3898 - accuracy: 0.4682 - val_loss: 1.2490 - val_accuracy: 0.5040
Epoch 22/100
107/107 [==============================] - 109s 1s/step - loss: 1.3261 - accuracy: 0.4723 - val_loss: 1.3405 - val_accuracy: 0.4357
Epoch 23/100
107/107 [==============================] - 113s 1s/step - loss: 1.3181 - accuracy: 0.4853 - val_loss: 1.2086 - val_accuracy: 0.5333
Epoch 24/100
107/107 [==============================] - 114s 1s/step - loss: 1.2717 - accuracy: 0.5165 - val_loss: 1.1532 - val_accuracy: 0.5421
Epoch 25/100
107/107 [==============================] - 119s 1s/step - loss: 1.2701 - accuracy: 0.5212 - val_loss: 1.2023 - val_accuracy: 0.5049
Epoch 26/100
107/107 [==============================] - 114s 1s/step - loss: 1.2654 - accuracy: 0.5180 - val_loss: 1.1562 - val_accuracy: 0.5475
Epoch 27/100
107/107 [==============================] - 110s 1s/step - loss: 1.2484 - accuracy: 0.5127 - val_loss: 1.1294 - val_accuracy: 0.5670
Epoch 28/100
107/107 [==============================] - 110s 1s/step - loss: 1.2198 - accuracy: 0.5245 - val_loss: 1.1187 - val_accuracy: 0.5883
Epoch 29/100
107/107 [==============================] - 107s 1s/step - loss: 1.2246 - accuracy: 0.5312 - val_loss: 1.1648 - val_accuracy: 0.5909
Epoch 30/100
107/107 [==============================] - 104s 974ms/step - loss: 1.1981 - accuracy: 0.5480 - val_loss: 1.0722 - val_accuracy: 0.5723
Epoch 31/100
107/107 [==============================] - 104s 977ms/step - loss: 1.1903 - accuracy: 0.5345 - val_loss: 1.0383 - val_accuracy: 0.5759
Epoch 32/100
107/107 [==============================] - 109s 1s/step - loss: 1.1628 - accuracy: 0.5557 - val_loss: 1.0176 - val_accuracy: 0.5839
Epoch 33/100
107/107 [==============================] - 105s 979ms/step - loss: 1.1498 - accuracy: 0.5516 - val_loss: 1.1495 - val_accuracy: 0.5288
Epoch 34/100
107/107 [==============================] - 103s 962ms/step - loss: 1.1610 - accuracy: 0.5392 - val_loss: 1.0075 - val_accuracy: 0.6335
Epoch 35/100
107/107 [==============================] - 102s 954ms/step - loss: 1.1632 - accuracy: 0.5451 - val_loss: 1.0359 - val_accuracy: 0.6193
Epoch 36/100
107/107 [==============================] - 109s 1s/step - loss: 1.1321 - accuracy: 0.5716 - val_loss: 1.0270 - val_accuracy: 0.5954
Epoch 37/100
107/107 [==============================] - 112s 1s/step - loss: 1.1176 - accuracy: 0.5728 - val_loss: 1.0426 - val_accuracy: 0.6353
Epoch 38/100
107/107 [==============================] - 111s 1s/step - loss: 1.1178 - accuracy: 0.5622 - val_loss: 0.9786 - val_accuracy: 0.6318
Epoch 39/100
107/107 [==============================] - 112s 1s/step - loss: 1.1308 - accuracy: 0.5760 - val_loss: 0.9958 - val_accuracy: 0.6105
Epoch 40/100
107/107 [==============================] - 111s 1s/step - loss: 1.0793 - accuracy: 0.5872 - val_loss: 1.0060 - val_accuracy: 0.5874
Epoch 41/100
107/107 [==============================] - 107s 1s/step - loss: 1.0607 - accuracy: 0.5975 - val_loss: 0.9178 - val_accuracy: 0.6664
Epoch 42/100
107/107 [==============================] - 106s 990ms/step - loss: 1.0701 - accuracy: 0.5955 - val_loss: 1.0350 - val_accuracy: 0.5847
Epoch 43/100
107/107 [==============================] - 107s 998ms/step - loss: 1.0672 - accuracy: 0.5875 - val_loss: 0.9968 - val_accuracy: 0.5821
Epoch 44/100
107/107 [==============================] - 112s 1s/step - loss: 1.0453 - accuracy: 0.5981 - val_loss: 0.9809 - val_accuracy: 0.6256
Epoch 45/100
107/107 [==============================] - 106s 989ms/step - loss: 1.0821 - accuracy: 0.5863 - val_loss: 1.2063 - val_accuracy: 0.5794
Epoch 46/100
107/107 [==============================] - 108s 1s/step - loss: 1.1146 - accuracy: 0.5784 - val_loss: 1.0019 - val_accuracy: 0.6193
Epoch 47/100
107/107 [==============================] - 113s 1s/step - loss: 1.0658 - accuracy: 0.5801 - val_loss: 0.9914 - val_accuracy: 0.5768
Epoch 48/100
107/107 [==============================] - 115s 1s/step - loss: 1.0377 - accuracy: 0.5972 - val_loss: 0.9133 - val_accuracy: 0.6522
Epoch 49/100
107/107 [==============================] - 110s 1s/step - loss: 1.0814 - accuracy: 0.5955 - val_loss: 0.9343 - val_accuracy: 0.6229
Epoch 50/100
107/107 [==============================] - 161s 2s/step - loss: 1.0106 - accuracy: 0.6046 - val_loss: 0.9386 - val_accuracy: 0.6282
Epoch 51/100
107/107 [==============================] - 191s 2s/step - loss: 1.0262 - accuracy: 0.5963 - val_loss: 0.9286 - val_accuracy: 0.6433
Epoch 52/100
107/107 [==============================] - 192s 2s/step - loss: 1.0086 - accuracy: 0.6181 - val_loss: 0.9220 - val_accuracy: 0.6371
Epoch 53/100
107/107 [==============================] - 190s 2s/step - loss: 0.9961 - accuracy: 0.6126 - val_loss: 0.8873 - val_accuracy: 0.6442
Epoch 54/100
107/107 [==============================] - 191s 2s/step - loss: 1.0047 - accuracy: 0.6131 - val_loss: 0.9051 - val_accuracy: 0.6664
Epoch 55/100
107/107 [==============================] - 190s 2s/step - loss: 1.0002 - accuracy: 0.6072 - val_loss: 0.9096 - val_accuracy: 0.6469
Epoch 56/100
107/107 [==============================] - 193s 2s/step - loss: 0.9826 - accuracy: 0.6379 - val_loss: 0.8885 - val_accuracy: 0.6522
Epoch 57/100
107/107 [==============================] - 190s 2s/step - loss: 0.9753 - accuracy: 0.6226 - val_loss: 0.9831 - val_accuracy: 0.6318
Epoch 58/100
107/107 [==============================] - 189s 2s/step - loss: 0.9459 - accuracy: 0.6323 - val_loss: 0.9388 - val_accuracy: 0.6282
Epoch 59/100
107/107 [==============================] - 188s 2s/step - loss: 0.9572 - accuracy: 0.6429 - val_loss: 0.8805 - val_accuracy: 0.6850
Epoch 60/100
107/107 [==============================] - 187s 2s/step - loss: 0.9690 - accuracy: 0.6320 - val_loss: 0.8379 - val_accuracy: 0.6939
Epoch 61/100
107/107 [==============================] - 189s 2s/step - loss: 0.9390 - accuracy: 0.6293 - val_loss: 0.8530 - val_accuracy: 0.6939
Epoch 62/100
107/107 [==============================] - 188s 2s/step - loss: 0.9348 - accuracy: 0.6509 - val_loss: 0.8234 - val_accuracy: 0.6983
Epoch 63/100
107/107 [==============================] - 27152s 256s/step - loss: 0.9574 - accuracy: 0.6358 - val_loss: 0.8923 - val_accuracy: 0.6735
Epoch 64/100
107/107 [==============================] - 185s 2s/step - loss: 0.9563 - accuracy: 0.6420 - val_loss: 0.8772 - val_accuracy: 0.6628
Epoch 65/100
107/107 [==============================] - 205s 2s/step - loss: 0.9251 - accuracy: 0.6503 - val_loss: 0.8651 - val_accuracy: 0.6832
Epoch 66/100
107/107 [==============================] - 205s 2s/step - loss: 0.9492 - accuracy: 0.6293 - val_loss: 0.8361 - val_accuracy: 0.6832
Epoch 67/100
107/107 [==============================] - 216s 2s/step - loss: 0.9224 - accuracy: 0.6600 - val_loss: 0.8269 - val_accuracy: 0.6948
Epoch 68/100
107/107 [==============================] - 207s 2s/step - loss: 0.9755 - accuracy: 0.6388 - val_loss: 0.9503 - val_accuracy: 0.6344
Epoch 69/100
107/107 [==============================] - 216s 2s/step - loss: 0.8931 - accuracy: 0.6656 - val_loss: 0.8891 - val_accuracy: 0.6602
Epoch 70/100
107/107 [==============================] - 226s 2s/step - loss: 0.8972 - accuracy: 0.6541 - val_loss: 0.8402 - val_accuracy: 0.6752
Epoch 71/100
107/107 [==============================] - 132s 1s/step - loss: 0.9017 - accuracy: 0.6479 - val_loss: 0.8628 - val_accuracy: 0.6717
Epoch 72/100
107/107 [==============================] - 130s 1s/step - loss: 0.8745 - accuracy: 0.6697 - val_loss: 0.9158 - val_accuracy: 0.6495
Epoch 73/100
107/107 [==============================] - 145s 1s/step - loss: 0.8860 - accuracy: 0.6650 - val_loss: 0.8367 - val_accuracy: 0.6735
Epoch 74/100
107/107 [==============================] - 145s 1s/step - loss: 0.8778 - accuracy: 0.6600 - val_loss: 0.8808 - val_accuracy: 0.6735
Epoch 75/100
107/107 [==============================] - 145s 1s/step - loss: 0.9184 - accuracy: 0.6470 - val_loss: 0.8551 - val_accuracy: 0.6664
Epoch 76/100
107/107 [==============================] - 135s 1s/step - loss: 0.8651 - accuracy: 0.6647 - val_loss: 0.8232 - val_accuracy: 0.7028
Epoch 77/100
107/107 [==============================] - 124s 1s/step - loss: 0.8846 - accuracy: 0.6718 - val_loss: 0.7907 - val_accuracy: 0.7036
Epoch 78/100
107/107 [==============================] - 121s 1s/step - loss: 0.8547 - accuracy: 0.6612 - val_loss: 0.8852 - val_accuracy: 0.6619
Epoch 79/100
107/107 [==============================] - 125s 1s/step - loss: 0.8571 - accuracy: 0.6777 - val_loss: 0.8196 - val_accuracy: 0.6948
Epoch 80/100
107/107 [==============================] - 123s 1s/step - loss: 0.8984 - accuracy: 0.6476 - val_loss: 0.8710 - val_accuracy: 0.6504
Epoch 81/100
107/107 [==============================] - 120s 1s/step - loss: 0.8812 - accuracy: 0.6529 - val_loss: 0.8334 - val_accuracy: 0.6868
Epoch 82/100
107/107 [==============================] - 140s 1s/step - loss: 0.8874 - accuracy: 0.6497 - val_loss: 0.9314 - val_accuracy: 0.6424
Epoch 83/100
107/107 [==============================] - 135s 1s/step - loss: 0.8228 - accuracy: 0.6836 - val_loss: 0.8156 - val_accuracy: 0.6939
Epoch 84/100
107/107 [==============================] - 141s 1s/step - loss: 0.8579 - accuracy: 0.6780 - val_loss: 1.1742 - val_accuracy: 0.5510
Epoch 85/100
107/107 [==============================] - 147s 1s/step - loss: 0.8715 - accuracy: 0.6682 - val_loss: 0.8515 - val_accuracy: 0.6699
Epoch 86/100
107/107 [==============================] - 152s 1s/step - loss: 0.8592 - accuracy: 0.6777 - val_loss: 0.8394 - val_accuracy: 0.6690
Epoch 87/100
107/107 [==============================] - 155s 1s/step - loss: 0.8599 - accuracy: 0.6738 - val_loss: 0.9155 - val_accuracy: 0.6477
Epoch 88/100
107/107 [==============================] - 131s 1s/step - loss: 0.8519 - accuracy: 0.6727 - val_loss: 0.7865 - val_accuracy: 0.7090
Epoch 89/100
107/107 [==============================] - 121s 1s/step - loss: 0.8097 - accuracy: 0.6927 - val_loss: 0.9439 - val_accuracy: 0.6380
Epoch 90/100
107/107 [==============================] - 122s 1s/step - loss: 0.8484 - accuracy: 0.6783 - val_loss: 0.7958 - val_accuracy: 0.7036
Epoch 91/100
107/107 [==============================] - 122s 1s/step - loss: 0.8371 - accuracy: 0.6889 - val_loss: 0.8159 - val_accuracy: 0.6788
Epoch 92/100
107/107 [==============================] - 137s 1s/step - loss: 0.8109 - accuracy: 0.6856 - val_loss: 0.7840 - val_accuracy: 0.7063
Epoch 93/100
107/107 [==============================] - 165s 2s/step - loss: 0.8170 - accuracy: 0.6806 - val_loss: 0.8211 - val_accuracy: 0.6699
Epoch 94/100
107/107 [==============================] - 187s 2s/step - loss: 0.8233 - accuracy: 0.6921 - val_loss: 0.8002 - val_accuracy: 0.7098
Epoch 95/100
107/107 [==============================] - 173s 2s/step - loss: 0.8140 - accuracy: 0.7004 - val_loss: 0.8282 - val_accuracy: 0.6850
Epoch 96/100
107/107 [==============================] - 173s 2s/step - loss: 0.8080 - accuracy: 0.7012 - val_loss: 0.8210 - val_accuracy: 0.6921
Epoch 97/100
107/107 [==============================] - 190s 2s/step - loss: 0.8122 - accuracy: 0.6971 - val_loss: 0.8895 - val_accuracy: 0.6655
Epoch 98/100
107/107 [==============================] - 196s 2s/step - loss: 0.8163 - accuracy: 0.7009 - val_loss: 0.8516 - val_accuracy: 0.6593
Epoch 99/100
107/107 [==============================] - 146s 1s/step - loss: 0.7775 - accuracy: 0.6983 - val_loss: 0.8494 - val_accuracy: 0.6948
Epoch 100/100
107/107 [==============================] - 134s 1s/step - loss: 0.8379 - accuracy: 0.6674 - val_loss: 0.8113 - val_accuracy: 0.6939

Produce the learning curves

In [5]:
# Access the training history
history = fit2.history

# Get the training and validation accuracy values
train_acc = history['accuracy']
val_acc = history['val_accuracy']

# Get the training and validation loss values
train_loss = history['loss']
val_loss = history['val_loss']

# Plot the training and validation accuracy
plt.plot(range(1, len(train_acc) + 1), train_acc, 'b-', label='Training Accuracy')
plt.plot(range(1, len(val_acc) + 1), val_acc, 'r-', label='Validation Accuracy')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.title('Training and Validation Accuracy')
plt.legend()
plt.show()

# Plot the training and validation loss
plt.plot(range(1, len(train_loss) + 1), train_loss, 'b-', label='Training Loss')
plt.plot(range(1, len(val_loss) + 1), val_loss, 'r-', label='Validation Loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.title('Training and Validation Loss')
plt.legend()
plt.show()

Get the Accuracy and loss for training and validation data

In [8]:
# Access the training history
history = fit2.history

# Get the training and validation accuracy and loss values at the last epoch
final_train_acc = history['accuracy'][-1]
final_val_acc = history['val_accuracy'][-1]
final_train_loss = history['loss'][-1]
final_val_loss = history['val_loss'][-1]

# Print the metrics at convergence
print("Final Training Accuracy:", final_train_acc)
print("Final Validation Accuracy:", final_val_acc)
print("Final Training Loss:", final_train_loss)
print("Final Validation Loss:", final_val_loss)
Final Training Accuracy: 0.667354166507721
Final Validation Accuracy: 0.6938775777816772
Final Training Loss: 0.8379225134849548
Final Validation Loss: 0.8112552762031555

Load in the images in the test folder

In [18]:
# Define the path to the test folder
test_folder = 'C:/Users/sofiy/vehicle_dataset/Test2'

# Initialize lists to store test images and labels
test_images = []
test_labels = []

# Load and preprocess test images
for class_label in os.listdir(test_folder):
    class_folder = os.path.join(test_folder, class_label)
    for image_file in os.listdir(class_folder):
        image_path = os.path.join(class_folder, image_file)
        image = load_img(image_path, target_size=(64, 64))
        image = img_to_array(image) / 255.0
        test_images.append(image)
        test_labels.append(class_label)

# Convert lists to NumPy arrays
test_images = np.array(test_images)
test_labels = np.array(test_labels)
In [19]:
# Make predictions on the test images
predictions = model2.predict(test_images)
8/8 [==============================] - 0s 19ms/step

Create the confusion matrix and get the precion

In [23]:
# Convert one-hot encoded predictions to class labels
predicted_labels = np.argmax(predictions, axis=1)

# Convert class labels to integers for both test_labels and predicted_labels
label_mapping = {label: idx for idx, label in enumerate(np.unique(test_labels))}
test_labels_int = np.array([label_mapping[label] for label in test_labels])
predicted_labels_int = predicted_labels

# Calculate the confusion matrix and precision
conf_matrix = confusion_matrix(test_labels_int, predicted_labels_int)
precision = precision_score(test_labels_int, predicted_labels_int, average='weighted')

print("Confusion Matrix:")
print(conf_matrix)
print("Precision:", precision)
Confusion Matrix:
[[ 3  0  4  8  5  0]
 [ 0 22  0  1 27  0]
 [ 2  0 41  4  2  1]
 [ 0  1  4 25 14  6]
 [ 0  3  1  2 43  1]
 [ 2  0  4  2  0 11]]
Precision: 0.6411312227211808
In [24]:
# Calculate test accuracy
test_accuracy = accuracy_score(test_labels_int, predicted_labels_int)
print("Test Accuracy:", test_accuracy)
Test Accuracy: 0.606694560669456

Loop through test folder and predict the classes of each image

In [7]:
# Define the path to the test folder
test_folder = 'C:/Users/sofiy/vehicle_dataset/Test'

# Get the class labels from the train generator
class_labels = list(train_generator.class_indices.keys())

# Function to load and preprocess a single image
def preprocess_image(image_path):
    image = load_img(image_path, target_size=(64, 64))
    image = img_to_array(image) / 255.0
    image = np.expand_dims(image, axis=0)
    return image

# Function to plot images with their predicted classes
def plot_images_with_predictions(images, predictions, class_labels, num_images_per_row=5):
    num_images = len(images)
    num_rows = (num_images + num_images_per_row - 1) // num_images_per_row

    plt.figure(figsize=(20, 4 * num_rows))

    for i in range(num_images):
        plt.subplot(num_rows, num_images_per_row, i + 1)
        plt.imshow(images[i])
        plt.axis('off')
        predicted_class = class_labels[np.argmax(predictions[i])]
        plt.title(f"Predicted: {predicted_class}")

    plt.tight_layout()
    plt.show()

# Initialize lists to store images and their predictions
test_images = []
test_predictions = []

# Loop through the images in the test folder and make predictions
for image_file in os.listdir(test_folder):
    image_path = os.path.join(test_folder, image_file)
    image = preprocess_image(image_path)
    prediction = model2.predict(image)
    test_images.append(load_img(image_path))
    test_predictions.append(prediction)

# Convert the list of predictions to a numpy array
test_predictions = np.vstack(test_predictions)

# Plot the images with their predicted classes in rows of 5
plot_images_with_predictions(test_images, test_predictions, class_labels, num_images_per_row=5)
1/1 [==============================] - 0s 85ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 16ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 11ms/step
1/1 [==============================] - 0s 16ms/step
1/1 [==============================] - 0s 20ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 17ms/step
1/1 [==============================] - 0s 16ms/step
1/1 [==============================] - 0s 30ms/step
1/1 [==============================] - 0s 20ms/step
1/1 [==============================] - 0s 25ms/step
1/1 [==============================] - 0s 23ms/step
1/1 [==============================] - 0s 16ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 28ms/step
1/1 [==============================] - 0s 17ms/step
1/1 [==============================] - 0s 33ms/step
1/1 [==============================] - 0s 29ms/step

Create and train the second model using data augumentaion

In [25]:
# Define the batch size
batch_size = 32

# Define the image data generator for the train set with data augmentation
train_datagen_aug = ImageDataGenerator(
    rescale=1.0/255,
    rotation_range=20,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True
)

# Load the images from the train folder using the augmented data generator
train_generator_aug = train_datagen_aug.flow_from_directory(
    os.path.join("C:/Users/sofiy/vehicle_dataset/Train"),     # path to train data
    target_size=(64, 64),
    batch_size=batch_size,
    class_mode='categorical',
    shuffle=True  # Shuffle the data during training
)

# Define the model with 3 convolutional layers and a dense layer
model = Sequential()
model.add(Conv2D(64, (3, 3), activation='relu', input_shape=(64, 64, 3)))
model.add(MaxPooling2D((2, 2)))
model.add(Conv2D(128, (3, 3), activation='relu'))
model.add(MaxPooling2D((2, 2)))
model.add(Conv2D(256, (3, 3), activation='relu'))
model.add(MaxPooling2D((2, 2)))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dense(6, activation='softmax'))  

# Compile the model
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])

# Train the model using the train generator with data augmentation
fit = model.fit(train_generator_aug, epochs=100, batch_size=batch_size, validation_data=validation_generator)
Found 3394 images belonging to 6 classes.
Epoch 1/100
107/107 [==============================] - 161s 1s/step - loss: 1.4978 - accuracy: 0.4125 - val_loss: 1.3555 - val_accuracy: 0.4676
Epoch 2/100
107/107 [==============================] - 142s 1s/step - loss: 1.2901 - accuracy: 0.5068 - val_loss: 1.5702 - val_accuracy: 0.4277
Epoch 3/100
107/107 [==============================] - 146s 1s/step - loss: 1.1906 - accuracy: 0.5480 - val_loss: 1.0535 - val_accuracy: 0.5980
Epoch 4/100
107/107 [==============================] - 140s 1s/step - loss: 1.1582 - accuracy: 0.5566 - val_loss: 1.3118 - val_accuracy: 0.4499
Epoch 5/100
107/107 [==============================] - 146s 1s/step - loss: 1.0826 - accuracy: 0.5934 - val_loss: 1.0891 - val_accuracy: 0.5670
Epoch 6/100
107/107 [==============================] - 145s 1s/step - loss: 1.0336 - accuracy: 0.6137 - val_loss: 1.0256 - val_accuracy: 0.6016
Epoch 7/100
107/107 [==============================] - 141s 1s/step - loss: 0.9889 - accuracy: 0.6305 - val_loss: 0.9004 - val_accuracy: 0.6646
Epoch 8/100
107/107 [==============================] - 149s 1s/step - loss: 0.9524 - accuracy: 0.6341 - val_loss: 0.7952 - val_accuracy: 0.7125
Epoch 9/100
107/107 [==============================] - 156s 1s/step - loss: 0.9368 - accuracy: 0.6432 - val_loss: 0.8483 - val_accuracy: 0.6868
Epoch 10/100
107/107 [==============================] - 151s 1s/step - loss: 0.9163 - accuracy: 0.6529 - val_loss: 0.8656 - val_accuracy: 0.6806
Epoch 11/100
107/107 [==============================] - 159s 1s/step - loss: 0.9149 - accuracy: 0.6523 - val_loss: 0.8263 - val_accuracy: 0.6841
Epoch 12/100
107/107 [==============================] - 167s 2s/step - loss: 0.8930 - accuracy: 0.6629 - val_loss: 1.0247 - val_accuracy: 0.6105
Epoch 13/100
107/107 [==============================] - 179s 2s/step - loss: 0.9337 - accuracy: 0.6476 - val_loss: 0.7509 - val_accuracy: 0.7258
Epoch 14/100
107/107 [==============================] - 166s 2s/step - loss: 0.8459 - accuracy: 0.6777 - val_loss: 0.7969 - val_accuracy: 0.7045
Epoch 15/100
107/107 [==============================] - 167s 2s/step - loss: 0.8182 - accuracy: 0.6900 - val_loss: 0.8719 - val_accuracy: 0.6681
Epoch 16/100
107/107 [==============================] - 152s 1s/step - loss: 0.7827 - accuracy: 0.7036 - val_loss: 0.7241 - val_accuracy: 0.7311
Epoch 17/100
107/107 [==============================] - 157s 1s/step - loss: 0.7803 - accuracy: 0.7071 - val_loss: 0.7661 - val_accuracy: 0.7090
Epoch 18/100
107/107 [==============================] - 146s 1s/step - loss: 0.7606 - accuracy: 0.7080 - val_loss: 0.7678 - val_accuracy: 0.7178
Epoch 19/100
107/107 [==============================] - 141s 1s/step - loss: 0.8359 - accuracy: 0.7030 - val_loss: 0.7304 - val_accuracy: 0.7320
Epoch 20/100
107/107 [==============================] - 142s 1s/step - loss: 0.7560 - accuracy: 0.7104 - val_loss: 0.7619 - val_accuracy: 0.7232
Epoch 21/100
107/107 [==============================] - 161s 2s/step - loss: 0.7484 - accuracy: 0.7174 - val_loss: 0.7197 - val_accuracy: 0.7400
Epoch 22/100
107/107 [==============================] - 145s 1s/step - loss: 0.7240 - accuracy: 0.7313 - val_loss: 0.7647 - val_accuracy: 0.7347
Epoch 23/100
107/107 [==============================] - 151s 1s/step - loss: 0.6815 - accuracy: 0.7437 - val_loss: 0.7086 - val_accuracy: 0.7489
Epoch 24/100
107/107 [==============================] - 151s 1s/step - loss: 0.6600 - accuracy: 0.7490 - val_loss: 0.7345 - val_accuracy: 0.7418
Epoch 25/100
107/107 [==============================] - 154s 1s/step - loss: 0.6789 - accuracy: 0.7404 - val_loss: 0.7506 - val_accuracy: 0.7143
Epoch 26/100
107/107 [==============================] - 149s 1s/step - loss: 0.6528 - accuracy: 0.7560 - val_loss: 0.6711 - val_accuracy: 0.7524
Epoch 27/100
107/107 [==============================] - 132s 1s/step - loss: 0.6214 - accuracy: 0.7666 - val_loss: 0.7208 - val_accuracy: 0.7391
Epoch 28/100
107/107 [==============================] - 137s 1s/step - loss: 0.6314 - accuracy: 0.7640 - val_loss: 0.7908 - val_accuracy: 0.7205
Epoch 29/100
107/107 [==============================] - 147s 1s/step - loss: 0.6538 - accuracy: 0.7516 - val_loss: 0.7175 - val_accuracy: 0.7267
Epoch 30/100
107/107 [==============================] - 734s 7s/step - loss: 0.6103 - accuracy: 0.7725 - val_loss: 0.8080 - val_accuracy: 0.7187
Epoch 31/100
107/107 [==============================] - 199s 2s/step - loss: 0.5960 - accuracy: 0.7749 - val_loss: 0.6895 - val_accuracy: 0.7604
Epoch 32/100
107/107 [==============================] - 209s 2s/step - loss: 0.5721 - accuracy: 0.7811 - val_loss: 0.7013 - val_accuracy: 0.7471
Epoch 33/100
107/107 [==============================] - 216s 2s/step - loss: 0.6085 - accuracy: 0.7711 - val_loss: 0.7430 - val_accuracy: 0.7524
Epoch 34/100
107/107 [==============================] - 228s 2s/step - loss: 0.5793 - accuracy: 0.7802 - val_loss: 0.7193 - val_accuracy: 0.7578
Epoch 35/100
107/107 [==============================] - 215s 2s/step - loss: 0.5681 - accuracy: 0.7873 - val_loss: 0.7751 - val_accuracy: 0.7285
Epoch 36/100
107/107 [==============================] - 231s 2s/step - loss: 0.5676 - accuracy: 0.7899 - val_loss: 0.7651 - val_accuracy: 0.7489
Epoch 37/100
107/107 [==============================] - 224s 2s/step - loss: 0.5391 - accuracy: 0.8041 - val_loss: 0.6722 - val_accuracy: 0.7675
Epoch 38/100
107/107 [==============================] - 211s 2s/step - loss: 0.5335 - accuracy: 0.8049 - val_loss: 0.9190 - val_accuracy: 0.7356
Epoch 39/100
107/107 [==============================] - 263s 2s/step - loss: 0.5628 - accuracy: 0.7873 - val_loss: 0.7049 - val_accuracy: 0.7551
Epoch 40/100
107/107 [==============================] - 265s 2s/step - loss: 0.5338 - accuracy: 0.7991 - val_loss: 0.7674 - val_accuracy: 0.7338
Epoch 41/100
107/107 [==============================] - 268s 3s/step - loss: 0.5335 - accuracy: 0.8029 - val_loss: 0.8930 - val_accuracy: 0.7178
Epoch 42/100
107/107 [==============================] - 261s 2s/step - loss: 0.5898 - accuracy: 0.7843 - val_loss: 0.7735 - val_accuracy: 0.7338
Epoch 43/100
107/107 [==============================] - 263s 2s/step - loss: 0.5060 - accuracy: 0.8091 - val_loss: 0.7850 - val_accuracy: 0.7400
Epoch 44/100
107/107 [==============================] - 264s 2s/step - loss: 0.4882 - accuracy: 0.8100 - val_loss: 0.7892 - val_accuracy: 0.7445
Epoch 45/100
107/107 [==============================] - 274s 3s/step - loss: 0.4736 - accuracy: 0.8247 - val_loss: 0.6943 - val_accuracy: 0.7640
Epoch 46/100
107/107 [==============================] - 256s 2s/step - loss: 0.4853 - accuracy: 0.8135 - val_loss: 0.7871 - val_accuracy: 0.7391
Epoch 47/100
107/107 [==============================] - 257s 2s/step - loss: 0.4773 - accuracy: 0.8279 - val_loss: 0.7585 - val_accuracy: 0.7524
Epoch 48/100
107/107 [==============================] - 261s 2s/step - loss: 0.4606 - accuracy: 0.8276 - val_loss: 0.7381 - val_accuracy: 0.7560
Epoch 49/100
107/107 [==============================] - 246s 2s/step - loss: 0.4522 - accuracy: 0.8312 - val_loss: 0.6863 - val_accuracy: 0.7542
Epoch 50/100
107/107 [==============================] - 223s 2s/step - loss: 0.4605 - accuracy: 0.8265 - val_loss: 0.7504 - val_accuracy: 0.7533
Epoch 51/100
107/107 [==============================] - 236s 2s/step - loss: 0.4485 - accuracy: 0.8341 - val_loss: 0.8555 - val_accuracy: 0.7480
Epoch 52/100
107/107 [==============================] - 241s 2s/step - loss: 0.4480 - accuracy: 0.8300 - val_loss: 0.7390 - val_accuracy: 0.7649
Epoch 53/100
107/107 [==============================] - 222s 2s/step - loss: 0.4083 - accuracy: 0.8465 - val_loss: 0.7962 - val_accuracy: 0.7507
Epoch 54/100
107/107 [==============================] - 244s 2s/step - loss: 0.4193 - accuracy: 0.8506 - val_loss: 0.7157 - val_accuracy: 0.7595
Epoch 55/100
107/107 [==============================] - 228s 2s/step - loss: 0.4136 - accuracy: 0.8447 - val_loss: 0.7901 - val_accuracy: 0.7569
Epoch 56/100
107/107 [==============================] - 222s 2s/step - loss: 0.4341 - accuracy: 0.8294 - val_loss: 0.8372 - val_accuracy: 0.7303
Epoch 57/100
107/107 [==============================] - 235s 2s/step - loss: 0.4524 - accuracy: 0.8329 - val_loss: 0.8023 - val_accuracy: 0.7356
Epoch 58/100
107/107 [==============================] - 247s 2s/step - loss: 0.4153 - accuracy: 0.8500 - val_loss: 0.7314 - val_accuracy: 0.7533
Epoch 59/100
107/107 [==============================] - 220s 2s/step - loss: 0.4400 - accuracy: 0.8388 - val_loss: 0.7312 - val_accuracy: 0.7657
Epoch 60/100
107/107 [==============================] - 236s 2s/step - loss: 0.4136 - accuracy: 0.8459 - val_loss: 0.7776 - val_accuracy: 0.7569
Epoch 61/100
107/107 [==============================] - 220s 2s/step - loss: 0.3978 - accuracy: 0.8600 - val_loss: 0.8269 - val_accuracy: 0.7516
Epoch 62/100
107/107 [==============================] - 197s 2s/step - loss: 0.3908 - accuracy: 0.8571 - val_loss: 0.7582 - val_accuracy: 0.7657
Epoch 63/100
107/107 [==============================] - 204s 2s/step - loss: 0.3866 - accuracy: 0.8562 - val_loss: 0.8015 - val_accuracy: 0.7436
Epoch 64/100
107/107 [==============================] - 182s 2s/step - loss: 0.3741 - accuracy: 0.8639 - val_loss: 0.7748 - val_accuracy: 0.7649
Epoch 65/100
107/107 [==============================] - 176s 2s/step - loss: 0.3719 - accuracy: 0.8600 - val_loss: 0.7788 - val_accuracy: 0.7595
Epoch 66/100
107/107 [==============================] - 165s 2s/step - loss: 0.4081 - accuracy: 0.8424 - val_loss: 0.8957 - val_accuracy: 0.7551
Epoch 67/100
107/107 [==============================] - 160s 1s/step - loss: 0.4385 - accuracy: 0.8427 - val_loss: 0.7989 - val_accuracy: 0.7533
Epoch 68/100
107/107 [==============================] - 162s 2s/step - loss: 0.3934 - accuracy: 0.8527 - val_loss: 0.7484 - val_accuracy: 0.7897
Epoch 69/100
107/107 [==============================] - 154s 1s/step - loss: 0.3740 - accuracy: 0.8565 - val_loss: 0.7851 - val_accuracy: 0.7782
Epoch 70/100
107/107 [==============================] - 171s 2s/step - loss: 0.3999 - accuracy: 0.8583 - val_loss: 0.8055 - val_accuracy: 0.7578
Epoch 71/100
107/107 [==============================] - 165s 2s/step - loss: 0.3447 - accuracy: 0.8721 - val_loss: 0.7870 - val_accuracy: 0.7622
Epoch 72/100
107/107 [==============================] - 162s 2s/step - loss: 0.3570 - accuracy: 0.8651 - val_loss: 0.8736 - val_accuracy: 0.7551
Epoch 73/100
107/107 [==============================] - 165s 2s/step - loss: 0.3536 - accuracy: 0.8683 - val_loss: 0.8346 - val_accuracy: 0.7675
Epoch 74/100
107/107 [==============================] - 159s 1s/step - loss: 0.3473 - accuracy: 0.8715 - val_loss: 0.8205 - val_accuracy: 0.7622
Epoch 75/100
107/107 [==============================] - 145s 1s/step - loss: 0.3531 - accuracy: 0.8724 - val_loss: 0.8394 - val_accuracy: 0.7436
Epoch 76/100
107/107 [==============================] - 138s 1s/step - loss: 0.3309 - accuracy: 0.8807 - val_loss: 0.8993 - val_accuracy: 0.7409
Epoch 77/100
107/107 [==============================] - 126s 1s/step - loss: 0.3326 - accuracy: 0.8783 - val_loss: 0.8164 - val_accuracy: 0.7613
Epoch 78/100
107/107 [==============================] - 132s 1s/step - loss: 0.3686 - accuracy: 0.8683 - val_loss: 0.7968 - val_accuracy: 0.7728
Epoch 79/100
107/107 [==============================] - 216s 2s/step - loss: 0.3193 - accuracy: 0.8830 - val_loss: 0.9071 - val_accuracy: 0.7303
Epoch 80/100
107/107 [==============================] - 216s 2s/step - loss: 0.3406 - accuracy: 0.8727 - val_loss: 0.8046 - val_accuracy: 0.7720
Epoch 81/100
107/107 [==============================] - 218s 2s/step - loss: 0.3414 - accuracy: 0.8736 - val_loss: 0.9369 - val_accuracy: 0.7542
Epoch 82/100
107/107 [==============================] - 222s 2s/step - loss: 0.3216 - accuracy: 0.8842 - val_loss: 0.9257 - val_accuracy: 0.7507
Epoch 83/100
107/107 [==============================] - 222s 2s/step - loss: 0.3357 - accuracy: 0.8780 - val_loss: 0.8487 - val_accuracy: 0.7622
Epoch 84/100
107/107 [==============================] - 249s 2s/step - loss: 0.3348 - accuracy: 0.8821 - val_loss: 0.8693 - val_accuracy: 0.7640
Epoch 85/100
107/107 [==============================] - 221s 2s/step - loss: 0.3187 - accuracy: 0.8836 - val_loss: 0.8610 - val_accuracy: 0.7640
Epoch 86/100
107/107 [==============================] - 222s 2s/step - loss: 0.2765 - accuracy: 0.9022 - val_loss: 0.9551 - val_accuracy: 0.7524
Epoch 87/100
107/107 [==============================] - 222s 2s/step - loss: 0.3197 - accuracy: 0.8824 - val_loss: 0.9699 - val_accuracy: 0.7391
Epoch 88/100
107/107 [==============================] - 221s 2s/step - loss: 0.3030 - accuracy: 0.8898 - val_loss: 0.9054 - val_accuracy: 0.7533
Epoch 89/100
107/107 [==============================] - 220s 2s/step - loss: 0.2926 - accuracy: 0.8925 - val_loss: 0.8357 - val_accuracy: 0.7560
Epoch 90/100
107/107 [==============================] - 221s 2s/step - loss: 0.2908 - accuracy: 0.8930 - val_loss: 0.8318 - val_accuracy: 0.7622
Epoch 91/100
107/107 [==============================] - 221s 2s/step - loss: 0.2842 - accuracy: 0.8951 - val_loss: 0.9950 - val_accuracy: 0.7471
Epoch 92/100
107/107 [==============================] - 221s 2s/step - loss: 0.2939 - accuracy: 0.8975 - val_loss: 0.9205 - val_accuracy: 0.7569
Epoch 93/100
107/107 [==============================] - 220s 2s/step - loss: 0.2847 - accuracy: 0.8910 - val_loss: 0.9866 - val_accuracy: 0.7232
Epoch 94/100
107/107 [==============================] - 220s 2s/step - loss: 0.3082 - accuracy: 0.8901 - val_loss: 0.9796 - val_accuracy: 0.7347
Epoch 95/100
107/107 [==============================] - 218s 2s/step - loss: 0.3320 - accuracy: 0.8789 - val_loss: 0.9235 - val_accuracy: 0.7649
Epoch 96/100
107/107 [==============================] - 224s 2s/step - loss: 0.2980 - accuracy: 0.8919 - val_loss: 0.8781 - val_accuracy: 0.7737
Epoch 97/100
107/107 [==============================] - 221s 2s/step - loss: 0.2808 - accuracy: 0.8984 - val_loss: 0.9094 - val_accuracy: 0.7569
Epoch 98/100
107/107 [==============================] - 220s 2s/step - loss: 0.2814 - accuracy: 0.8951 - val_loss: 0.9186 - val_accuracy: 0.7560
Epoch 99/100
107/107 [==============================] - 220s 2s/step - loss: 0.2799 - accuracy: 0.9057 - val_loss: 0.8632 - val_accuracy: 0.7728
Epoch 100/100
107/107 [==============================] - 218s 2s/step - loss: 0.2602 - accuracy: 0.9045 - val_loss: 1.0301 - val_accuracy: 0.7338

Produce the learning curves

In [26]:
# Access the training history
history = fit.history

# Get the training and validation accuracy values
train_acc = history['accuracy']
val_acc = history['val_accuracy']

# Get the training and validation loss values
train_loss = history['loss']
val_loss = history['val_loss']

# Plot the training and validation accuracy
plt.plot(range(1, len(train_acc) + 1), train_acc, 'b-', label='Training Accuracy')
plt.plot(range(1, len(val_acc) + 1), val_acc, 'r-', label='Validation Accuracy')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.title('Training and Validation Accuracy')
plt.legend()
plt.show()

# Plot the training and validation loss
plt.plot(range(1, len(train_loss) + 1), train_loss, 'b-', label='Training Loss')
plt.plot(range(1, len(val_loss) + 1), val_loss, 'r-', label='Validation Loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.title('Training and Validation Loss')
plt.legend()
plt.show()

Get the Accuracy and loss for training and validation data

In [27]:
# Access the training history
history = fit.history

# Get the training and validation accuracy and loss values at the last epoch
final_train_acc = history['accuracy'][-1]
final_val_acc = history['val_accuracy'][-1]
final_train_loss = history['loss'][-1]
final_val_loss = history['val_loss'][-1]

# Print the metrics at convergence
print("Final Training Accuracy:", final_train_acc)
print("Final Validation Accuracy:", final_val_acc)
print("Final Training Loss:", final_train_loss)
print("Final Validation Loss:", final_val_loss)
Final Training Accuracy: 0.9045374393463135
Final Validation Accuracy: 0.7338065505027771
Final Training Loss: 0.260242760181427
Final Validation Loss: 1.0300655364990234
In [28]:
# Make predictions on the test images
predictions = model.predict(test_images)
8/8 [==============================] - 0s 39ms/step

Create the confusion matrix and get the precision

In [29]:
# Convert one-hot encoded predictions to class labels
predicted_labels = np.argmax(predictions, axis=1)

# Convert class labels to integers for both test_labels and predicted_labels
label_mapping = {label: idx for idx, label in enumerate(np.unique(test_labels))}
test_labels_int = np.array([label_mapping[label] for label in test_labels])
predicted_labels_int = predicted_labels

# Calculate the confusion matrix and precision
conf_matrix = confusion_matrix(test_labels_int, predicted_labels_int)
precision = precision_score(test_labels_int, predicted_labels_int, average='weighted')

print("Confusion Matrix:")
print(conf_matrix)
print("Precision:", precision)
Confusion Matrix:
[[ 6  0  4  9  1  0]
 [ 0 41  0  2  7  0]
 [ 2  1 30 13  0  4]
 [ 1  3  3 38  3  2]
 [ 0 13  2  5 30  0]
 [ 1  0  0  5  0 13]]
Precision: 0.6769063631393004
In [30]:
# Calculate test accuracy
test_accuracy = accuracy_score(test_labels_int, predicted_labels_int)
print("Test Accuracy:", test_accuracy)
Test Accuracy: 0.6610878661087866

Loop through test folder and predict the classes of each image

In [31]:
# Define the path to the test folder
test_folder = 'C:/Users/sofiy/vehicle_dataset/Test'

# Get the class labels from the train generator
class_labels = list(train_generator.class_indices.keys())

# Function to load and preprocess a single image
def preprocess_image(image_path):
    image = load_img(image_path, target_size=(64, 64))
    image = img_to_array(image) / 255.0
    image = np.expand_dims(image, axis=0)
    return image

# Function to plot images with their predicted classes
def plot_images_with_predictions(images, predictions, class_labels, num_images_per_row=5):
    num_images = len(images)
    num_rows = (num_images + num_images_per_row - 1) // num_images_per_row

    plt.figure(figsize=(20, 4 * num_rows))

    for i in range(num_images):
        plt.subplot(num_rows, num_images_per_row, i + 1)
        plt.imshow(images[i])
        plt.axis('off')
        predicted_class = class_labels[np.argmax(predictions[i])]
        plt.title(f"Predicted: {predicted_class}")

    plt.tight_layout()
    plt.show()

# Initialize lists to store images and their predictions
test_images = []
test_predictions = []

# Loop through the images in the test folder and make predictions
for image_file in os.listdir(test_folder):
    image_path = os.path.join(test_folder, image_file)
    image = preprocess_image(image_path)
    prediction = model.predict(image)
    test_images.append(load_img(image_path))
    test_predictions.append(prediction)

# Convert the list of predictions to a numpy array
test_predictions = np.vstack(test_predictions)

# Plot the images with their predicted classes in rows of 5
plot_images_with_predictions(test_images, test_predictions, class_labels, num_images_per_row=5)
1/1 [==============================] - 0s 21ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 21ms/step
1/1 [==============================] - 0s 23ms/step
1/1 [==============================] - 0s 24ms/step
1/1 [==============================] - 0s 51ms/step
1/1 [==============================] - 0s 38ms/step
1/1 [==============================] - 0s 13ms/step
1/1 [==============================] - 0s 23ms/step
1/1 [==============================] - 0s 27ms/step
1/1 [==============================] - 0s 21ms/step
1/1 [==============================] - 0s 31ms/step
1/1 [==============================] - 0s 18ms/step
1/1 [==============================] - 0s 12ms/step
1/1 [==============================] - 0s 20ms/step
1/1 [==============================] - 0s 26ms/step
1/1 [==============================] - 0s 21ms/step
1/1 [==============================] - 0s 22ms/step
1/1 [==============================] - 0s 32ms/step
In [ ]: